Different features can be filtered from the neural signals. Here we investigate which preprocessing steps are suitable for this purpose.
from skimage import io
import skimage
import numpy as np
import matplotlib.pyplot as plt
from scipy.ndimage import gaussian_filter, uniform_filter
import pickle
import imageio
from pathlib import Path
from matplotlib.pyplot import show
from argparse import ArgumentParser
from pyoptflow import HornSchunck, getimgfiles
from pyoptflow.plots import compareGraphs
from PIL import Image
import os
from scipy.signal import argrelextrema
from skimage import exposure
import matplotlib
import matplotlib.animation
from IPython.display import HTML
# Allow arbitrarily large embedded animations in the notebook output.
matplotlib.rcParams['animation.embed_limit'] = 2**128
# NOTE(review): stray expression — the result is discarded; presumably left
# over from testing uint8 clipping. Consider removing.
np.array(np.clip([300],0,255), dtype=np.uint8)
import sys
%reload_ext autoreload
%autoreload 2
# Make the project-level utils package importable from this notebook.
sys.path.append('..')
from utils.visualization_tools import *
import utils.visualization_tools
from utils.data_transformations import *
import utils.data_transformations
from utils.diverse import *
import utils.diverse
The following modules are available:
# List the public methods of each project utility module for quick reference.
print_module_methods(utils.diverse)
print_module_methods(utils.visualization_tools)
print_module_methods(utils.data_transformations)
from pathlib import Path
# Load the raw recording: a multi-frame TIFF stack located in ../source_data.
source_folder = os.path.join(Path(os.getcwd()).parent, "source_data")
frames = skimage.io.imread(os.path.join(source_folder,"runstart16_X1.tif"))
# Quick visual sanity check of the first frame.
plt.imshow(frames[0,:,:])
frames.shape
# Keep only the first 1000 frames to limit memory usage.
frames = frames[:1000,:,:]
Here I calculate the difference from the pixelwise mean, as well as a smoothed version, which promises to increase the signal-to-noise ratio.
mean = np.mean(frames,axis=0)#pixelwise mean
# Deviation of every frame from the pixelwise mean, rescaled by the project
# helper normalize() — presumably to [0, 1]; confirm in utils.data_transformations.
difference = normalize(framewise_difference(frames, mean, bigdata=False))
# Light Gaussian blur (sigma=1) after removing each pixel's minimum,
# intended to improve the signal-to-noise ratio.
smooth = normalize(gaussian_filter(substract_pixel_min(difference), 1))
# Rescale each frame independently so every frame spans the full value range.
framewise_normalized = (np.array([normalize(frame) for frame in smooth]))
# Strong box blur over time (10 frames) and space (20x20 px): slow background.
smoother = normalize(uniform_filter(framewise_normalized,[10,20,20]))# [20,30,30]))
details = framewise_normalized-smoother
# NOTE(review): this line overwrites the high-pass `details` computed on the
# line above with per-frame-normalized `smooth` — looks like a leftover from
# experimentation; confirm which definition of `details` is intended.
details = np.array([normalize(frame) for frame in smooth])
#details = normalize(details)
# Local contrast enhancement via the project helper clipped_adaptive().
contrast_enhanced = clipped_adaptive(details)
For visualization of the slow waves' total activation:
%%capture
fig, ax = plt.subplots(1, figsize=(10,10))
# Fixed colour scale so brightness changes reflect the signal, not rescaling.
im = ax.imshow(smooth[0,:,:], vmin =.3, vmax=.5)#vmin=.25,vmax=.3)
startframe = 70
# Animate 30 consecutive smoothed frames starting at `startframe`.
ani = matplotlib.animation.FuncAnimation(fig, lambda i: im.set_array(smooth[startframe+i]), frames=30).to_jshtml()
HTML(ani)
For visualization of the nuances of small-scale travelling peaks in the activation, each frame is linearly rescaled so that its lowest value maps to 0 and its highest to 1.
%%capture
import matplotlib.animation
from IPython.display import HTML
fig, ax = plt.subplots(1, figsize=(10,10))
def display(frame):
global fig, ax
ax.cla()
im = ax.imshow(frame,vmin=0,vmax=1)#NORMALIZED FRAME HERE
return fig, ax
startframe = 50
ani = matplotlib.animation.FuncAnimation(fig, lambda i: display(framewise_normalized[startframe+i]), frames=20).to_jshtml()
HTML(ani)
%%capture
import matplotlib.animation
from IPython.display import HTML
fig, ax = plt.subplots(1, figsize=(10,10))
def display(frame):
global fig, ax
ax.cla()
im = ax.imshow(frame)#,vmin=.1,vmax=1)
return fig, ax
startframe = 50
ani = matplotlib.animation.FuncAnimation(fig, lambda i: display(details[startframe+i]), frames=20).to_jshtml()
HTML(ani)
%%capture
import matplotlib.animation
from IPython.display import HTML
fig, ax = plt.subplots(1, figsize=(10,10))
def display(frame):
global fig, ax
ax.cla()
im = ax.imshow(frame,vmin=.0,vmax=1)#NORMALIZE ROI FRAME HERE
return fig, ax
startframe = 0
ani = matplotlib.animation.FuncAnimation(fig, lambda i: display(contrast_enhanced[startframe+i]), frames=20).to_jshtml()
HTML(ani)
def sample_frame_in_roi(frame, window_size, left, right, top, bottom, margin=8):
    """Cut a region of interest out of `frame` and locally contrast-enhance it.

    Parameters
    ----------
    frame : 2D array — a single image frame.
    window_size : int — nominal side length of the ROI window.
    left, right, top, bottom : int — slice bounds of the ROI
        (first axis: left:right, second axis: top:bottom).
    margin : int, default 8 — number of pixels cropped off the far edges
        of the ROI (generalizes the previously hard-coded 8).

    Returns
    -------
    2D array of shape (window_size - margin, window_size - margin) with
    adaptive-histogram-equalized values.
    """
    # normalize() is a project helper applied before adaptive equalization.
    further_preprocessed = exposure.equalize_adapthist(
        normalize(frame[left:right, top:bottom]), clip_limit=0.03)
    # Trim the window edges; presumably discards equalization border
    # artefacts — TODO confirm.
    further_preprocessed = further_preprocessed[:window_size - margin,
                                                :window_size - margin]
    return further_preprocessed
def sample_roi(tensor, start_frame, stop_frame, window_size=60, left=120, top=80):
    """Extract and contrast-enhance a square ROI from a range of frames.

    Parameters
    ----------
    tensor : 3D array — stack of frames, indexed (frame, row, col).
    start_frame, stop_frame : int — half-open frame range [start, stop).
    window_size : int, default 60 — side length of the ROI window.
    left, top : int, defaults 120 / 80 — upper-left corner of the ROI.

    Returns
    -------
    3D array of the processed ROI, one entry per frame in the range.
    """
    right = left + window_size
    bottom = top + window_size
    return np.array([sample_frame_in_roi(tensor[i], window_size, left, right, top, bottom)
                     for i in range(start_frame, stop_frame)])
# Save the ROI cut from the raw frames as a background reference.
np.save("roi_background.npy",sample_roi(frames,0,400))
# The same ROI cut from the preprocessed `details`, saved for later use.
roi = sample_roi(details,0,400)
np.save("roi.npy",roi)
roi.shape
%%capture
fig, ax = plt.subplots(1, figsize=(10,10))
im = ax.imshow(smooth[0,:,:], vmin =.6, vmax=.8)
startframe = 0
ani = matplotlib.animation.FuncAnimation(fig, lambda i: im.set_array(roi[startframe+i]), frames=20).to_jshtml()
open("anim.html","w").write(ani)
HTML(ani)
There are two ways to look at this:
- Either there are physical clusters of neurons and they talk to each other
- Or there are liquid informational codes and they travel
# 90th-percentile intensity per ROI frame — a scalar activity trace over time.
upper_decentile_roi = np.array([np.quantile(f,0.9) for f in roi])
# High-pass: subtract a Gaussian-smoothed (sigma=5) copy to remove slow drift.
signal = upper_decentile_roi - gaussian_filter(upper_decentile_roi, 5)
# fourier() is a project helper; presumably returns frequency axis and
# spectrum magnitudes — TODO confirm in utils.
x, freq = fourier(signal)
fig, ax = plt.subplots(2)
ax[0].plot(signal-np.mean(signal))
ax[1].plot(x,freq)
The Fourier plot does not indicate any dominant frequencies.
x_comp, y_comp = horn_schunck(contrast_enhanced, 200)
%%capture
fig, ax = display_combined(x_comp[0],y_comp[0], details[1])
start = 25
frames = 10
def animate(i):
global start
i += start
print(".", end ="")
display_combined(x_comp[i]/10,y_comp[i]/10, details[i+1], fig=fig, ax=ax)
#Q.set_UVC(np.flipud(rescaled[:,:,0]), -np.flipud(rescaled[:,:,1]))
ani = matplotlib.animation.FuncAnimation(fig, animate, frames=frames)
from IPython.display import HTML
HTML(ani.to_jshtml())
roi = sample_roi(details,0,100)
x_comp, y_comp = horn_schunck(roi, len(roi)-1)
%%capture
fig, ax = display_combined(x_comp[0],y_comp[0], details[1])
start = 0
frames = 10
def animate(i):
i += start
print(".", end ="")
display_combined(x_comp[i]/5,y_comp[i]/5, roi[i+1], fig=fig, ax=ax, scale=10, quivstep=1)
#Q.set_UVC(np.flipud(rescaled[:,:,0]), -np.flipud(rescaled[:,:,1]))
ani = matplotlib.animation.FuncAnimation(fig, animate, frames=frames)
HTML(ani.to_jshtml())
One can filter small-scale motion patterns and large-scale dynamics. The big size of the data represents a challenge because of working-memory restrictions when using NumPy methods directly. Custom methods can help to reduce the memory requirements. Developing scripts that run in a computational grid on computers with large memory capacities could also help.